In [1]:
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import datetime
import chart_studio.plotly as py
import plotly.graph_objs as go
from plotly.offline import plot
from plotly import __version__
import cufflinks as cf
from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot
print(__version__)
%matplotlib inline
df=pd.read_csv('Tata-steel.csv')
df.head(10)
5.6.0
Out[1]:
Date Open Price High Price Low Price Close Price WAP No.of Shares No. of Trades Total Turnover (Rs.) Deliverable Quantity % Deli. Qty to Traded Qty Spread High-Low Spread Close-Open
0 03-Jul-15 308.00 308.00 299.75 300.95 302.708180 499554 6430 151219082 118810.0 23.78 8.25 -7.05
1 06-Jul-15 294.90 299.60 292.30 298.75 295.646338 663354 9184 196118181 117663.0 17.74 7.30 3.85
2 07-Jul-15 300.75 303.25 296.00 297.45 299.477691 877207 8581 262703927 211277.0 24.09 7.25 -3.30
3 08-Jul-15 293.80 294.00 282.00 283.40 286.875105 1446189 20272 414875621 563505.0 38.96 12.00 -10.40
4 09-Jul-15 285.20 287.25 279.60 280.55 282.860975 974983 13946 275784642 330248.0 33.87 7.65 -4.65
5 10-Jul-15 282.00 284.60 279.00 280.95 281.406322 702436 9268 197669931 135447.0 19.28 5.60 -1.05
6 13-Jul-15 281.90 285.60 279.15 284.45 282.905447 658957 8413 186422525 136197.0 20.67 6.45 2.55
7 14-Jul-15 285.00 286.90 281.05 281.70 283.275312 561904 7309 159173531 140665.0 25.03 5.85 -3.30
8 15-Jul-15 283.00 284.90 279.40 280.70 281.929804 564323 7346 159099473 139267.0 24.68 5.50 -2.30
9 16-Jul-15 285.00 286.75 280.60 281.50 282.752620 932539 10996 263677846 230537.0 24.72 6.15 -3.50
In [2]:
# To change date in the accepted format
df['Date']=pd.to_datetime(df.Date)
df.head(10)
Out[2]:
Date Open Price High Price Low Price Close Price WAP No.of Shares No. of Trades Total Turnover (Rs.) Deliverable Quantity % Deli. Qty to Traded Qty Spread High-Low Spread Close-Open
0 2015-07-03 308.00 308.00 299.75 300.95 302.708180 499554 6430 151219082 118810.0 23.78 8.25 -7.05
1 2015-07-06 294.90 299.60 292.30 298.75 295.646338 663354 9184 196118181 117663.0 17.74 7.30 3.85
2 2015-07-07 300.75 303.25 296.00 297.45 299.477691 877207 8581 262703927 211277.0 24.09 7.25 -3.30
3 2015-07-08 293.80 294.00 282.00 283.40 286.875105 1446189 20272 414875621 563505.0 38.96 12.00 -10.40
4 2015-07-09 285.20 287.25 279.60 280.55 282.860975 974983 13946 275784642 330248.0 33.87 7.65 -4.65
5 2015-07-10 282.00 284.60 279.00 280.95 281.406322 702436 9268 197669931 135447.0 19.28 5.60 -1.05
6 2015-07-13 281.90 285.60 279.15 284.45 282.905447 658957 8413 186422525 136197.0 20.67 6.45 2.55
7 2015-07-14 285.00 286.90 281.05 281.70 283.275312 561904 7309 159173531 140665.0 25.03 5.85 -3.30
8 2015-07-15 283.00 284.90 279.40 280.70 281.929804 564323 7346 159099473 139267.0 24.68 5.50 -2.30
9 2015-07-16 285.00 286.75 280.60 281.50 282.752620 932539 10996 263677846 230537.0 24.72 6.15 -3.50
In [3]:
df.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 1485 entries, 0 to 1484
Data columns (total 13 columns):
 #   Column                     Non-Null Count  Dtype         
---  ------                     --------------  -----         
 0   Date                       1485 non-null   datetime64[ns]
 1   Open Price                 1485 non-null   float64       
 2   High Price                 1485 non-null   float64       
 3   Low Price                  1485 non-null   float64       
 4   Close Price                1485 non-null   float64       
 5   WAP                        1485 non-null   float64       
 6   No.of Shares               1485 non-null   int64         
 7   No. of Trades              1485 non-null   int64         
 8   Total Turnover (Rs.)       1485 non-null   int64         
 9   Deliverable Quantity       1484 non-null   float64       
 10  % Deli. Qty to Traded Qty  1484 non-null   float64       
 11  Spread High-Low            1485 non-null   float64       
 12  Spread Close-Open          1485 non-null   float64       
dtypes: datetime64[ns](1), float64(9), int64(3)
memory usage: 150.9 KB
In [4]:
# To check whether we have any missing Data 
sns.heatmap(df.isnull(),yticklabels=False,cbar=False,cmap='viridis')
Out[4]:
<AxesSubplot:>
In [37]:
#EDA
sns.set_palette("GnBu_d")
sns.set_style('whitegrid')
sns.jointplot(x='High Price',y='Low Price',data=df,color='blue')
Out[37]:
<seaborn.axisgrid.JointGrid at 0x1d84e1c3ee0>
In [28]:
df['Open Price'].plot.hist()
Out[28]:
<AxesSubplot:ylabel='Frequency'>
In [29]:
plt.style.use('ggplot')
In [39]:
df['Open Price'].plot.hist(alpha=0.5,bins=25,color='blue')
Out[39]:
<AxesSubplot:ylabel='Frequency'>
In [6]:
df[['Open Price','Close Price','High Price','Close Price']].plot(kind='box')
Out[6]:
<AxesSubplot:>
In [7]:
layout=go.Layout(
    title='Stock price of Tata Steel',
    xaxis=dict(
        title='Date'
       
    ),
    yaxis=dict(
        title='Price'
        
    )
)
df1=[{'x':df['Date'],'y':df['Close Price']}]
plot=go.Figure(data=df1,layout=layout,)
iplot(plot)
In [8]:
#PREDICTION BY A REGRESSION MODEL
#Building the regression MOdel
from sklearn.model_selection import train_test_split

#for preprocessing
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import StandardScaler

#For Model Evaluation
from sklearn.metrics import mean_squared_error as mse
from sklearn.metrics import r2_score
In [9]:
# Split the data into train and test sets
X=np.array(df.index).reshape(-1,1)
Y=df['Close Price']
X_train, X_test, Y_train,Y_test = train_test_split(X,Y,test_size=0.15,random_state=1001)
In [10]:
#feature Scaling
scaler= StandardScaler().fit(X_train)
from sklearn.linear_model import LinearRegression
In [11]:
#creating a linear model
lm=LinearRegression()
lm.fit(X_train,Y_train) 
Out[11]:
LinearRegression()
In [12]:
#Plot Actual And predicted values for train dataset
trace0 = go.Scatter(
x = X_train.T[0],
y=Y_train,
mode='markers',
name='Actual'
)
trace1 = go.Scatter(
x = X_train.T[0],
y=lm.predict(X_train).T,
mode='lines',
name='Predicted'
)
df1=[trace0,trace1]
layout.xaxis.title.text='Day'
plot2 = go.Figure(data=df1,layout=layout)
iplot(plot2)
                
In [13]:
scores =f'''
{'Metric'.ljust(10)}{'Train'.center(20)}{'Test'.center(20)}
{'r2_score'.ljust(10)}{r2_score(Y_train,lm.predict(X_train))}\t{r2_score(Y_test,lm.predict(X_test))}
{'MSE'.ljust(10)}{mse(Y_train,lm.predict(X_train))}\t{mse(Y_test,lm.predict(X_test))}
'''
print(scores)
Metric           Train                Test        
r2_score  0.2113637853338285	0.28138911005029477
MSE       26976.312078813757	28115.874513353803

In [14]:
df1=df.reset_index()['Close Price']
In [15]:
#ANOTHER LINEAR REGRESSION MODEL(Approach 2)

X=df[['Open Price','High Price','Low Price','No.of Shares']]
y=df['Close Price']
from sklearn.model_selection import train_test_split
X_train,X_test,y_train,y_test=train_test_split(X,y,random_state=10)
X_train.shape
Out[15]:
(1113, 4)
In [16]:
from sklearn.linear_model import LinearRegression
lm=LinearRegression()
lm.fit(X_train,y_train)
Out[16]:
LinearRegression()
In [17]:
# The coefficients
print('Coefficients: \n', lm.coef_)
Coefficients: 
 [-5.91825043e-01  8.63147499e-01  7.26487405e-01 -1.80797315e-07]
In [18]:
predictions = lm.predict( 
    X_test)
In [35]:
plt.scatter(y_test,predictions,color='blue')
plt.xlabel('Y Test',fontsize=20,color='red')
plt.ylabel('Predicted Y',fontsize=20)
plt.plot
Out[35]:
<function matplotlib.pyplot.plot(*args, scalex=True, scaley=True, data=None, **kwargs)>
In [40]:
# calculate these metrics by hand!
from sklearn import metrics

print('MAE:', metrics.mean_absolute_error(y_test, predictions))
print('MSE:', metrics.mean_squared_error(y_test, predictions))
print('RMSE:', np.sqrt(metrics.mean_squared_error(y_test, predictions)))
MAE: 2.8607786203751284
MSE: 18.917985128026437
RMSE: 4.349481018239583
In [41]:
scores =f'''
{'Metric'.ljust(10)}{'Train'.center(20)}{'Test'.center(20)}
{'r2_score'.ljust(10)}{r2_score(y_train,lm.predict(X_train))}\t{r2_score(y_test,lm.predict(X_test))}
{'MSE'.ljust(10)}{mse(y_train,lm.predict(X_train))}\t{mse(y_test,lm.predict(X_test))}
'''
print(scores)
Metric           Train                Test        
r2_score  0.9995360586342691	0.9995019419848405
MSE       15.741870667697818	18.917985128026437

In [42]:
#NOW WE WOLL DO PRDCTION USING TENSORFLOW AND PREDICT THE PRICE FOR NEXT 10 DAYS

from sklearn.preprocessing import MinMaxScaler
scaler=MinMaxScaler(feature_range=(0,1))
df1=scaler.fit_transform(np.array(df1).reshape(-1,1))
In [43]:
print (df1)
[[0.09646022]
 [0.09432957]
 [0.09307055]
 ...
 [0.93506368]
 [0.93167401]
 [0.90513777]]
In [44]:
training_size=int(len(df1)*0.65)
test_size=len(df1)-training_size
train_data,test_data=df1[0:training_size,:],df1[training_size:len(df1),:1]
In [45]:
training_size,test_size
Out[45]:
(965, 520)
In [46]:
train_data
Out[46]:
array([[0.09646022],
       [0.09432957],
       [0.09307055],
       [0.07946346],
       [0.07670331],
       [0.0770907 ],
       [0.08048036],
       [0.07781705],
       [0.07684858],
       [0.07762336],
       [0.08033509],
       [0.08004455],
       [0.0709893 ],
       [0.07650961],
       [0.06677643],
       [0.06174035],
       [0.04847223],
       [0.04648685],
       [0.04774587],
       [0.04551838],
       [0.04450148],
       [0.04517941],
       [0.05331461],
       [0.05898019],
       [0.05806014],
       [0.0587865 ],
       [0.05791487],
       [0.04411409],
       [0.04624473],
       [0.03123335],
       [0.03462302],
       [0.04377512],
       [0.04871435],
       [0.04701951],
       [0.03902959],
       [0.03476829],
       [0.00464869],
       [0.01239649],
       [0.01370394],
       [0.02372766],
       [0.02629413],
       [0.02295288],
       [0.01438187],
       [0.01772311],
       [0.02740787],
       [0.01704518],
       [0.01409133],
       [0.02658467],
       [0.03670524],
       [0.03772214],
       [0.03070069],
       [0.03869062],
       [0.02682679],
       [0.02794054],
       [0.02634255],
       [0.02469614],
       [0.01806208],
       [0.0191274 ],
       [0.01360709],
       [0.00784466],
       [0.        ],
       [0.01021742],
       [0.01045954],
       [0.02242022],
       [0.02498668],
       [0.03433248],
       [0.03791584],
       [0.04784272],
       [0.04823011],
       [0.04246768],
       [0.04517941],
       [0.05224929],
       [0.04968282],
       [0.04532468],
       [0.03820638],
       [0.04208029],
       [0.04329088],
       [0.0465837 ],
       [0.04319403],
       [0.04445305],
       [0.04459832],
       [0.04396882],
       [0.03612416],
       [0.03360612],
       [0.033364  ],
       [0.02348555],
       [0.01815893],
       [0.0201443 ],
       [0.0160767 ],
       [0.01888528],
       [0.0201443 ],
       [0.02755315],
       [0.03210498],
       [0.02372766],
       [0.02668152],
       [0.0274563 ],
       [0.02232337],
       [0.02227495],
       [0.02571304],
       [0.02890901],
       [0.02779526],
       [0.03515568],
       [0.04082127],
       [0.03757687],
       [0.0376253 ],
       [0.04086969],
       [0.0323471 ],
       [0.02469614],
       [0.03060384],
       [0.03830323],
       [0.04232241],
       [0.0414992 ],
       [0.04217713],
       [0.05345988],
       [0.05229771],
       [0.05539683],
       [0.05408939],
       [0.06038449],
       [0.06019079],
       [0.05103869],
       [0.05210401],
       [0.05500944],
       [0.05636531],
       [0.05418624],
       [0.0538957 ],
       [0.07045664],
       [0.06503317],
       [0.04721321],
       [0.05069972],
       [0.04905331],
       [0.04358142],
       [0.04421093],
       [0.03617258],
       [0.02764999],
       [0.03379982],
       [0.03544623],
       [0.02939325],
       [0.03215341],
       [0.04430778],
       [0.050845  ],
       [0.05442836],
       [0.05137766],
       [0.04716479],
       [0.04653528],
       [0.02919955],
       [0.02193598],
       [0.02401821],
       [0.03162074],
       [0.03205656],
       [0.03312188],
       [0.03321873],
       [0.02290446],
       [0.01597986],
       [0.04343615],
       [0.04212871],
       [0.04842381],
       [0.0476006 ],
       [0.05026391],
       [0.05215244],
       [0.0504576 ],
       [0.04503414],
       [0.04522783],
       [0.0455668 ],
       [0.04624473],
       [0.05379885],
       [0.06372573],
       [0.08227204],
       [0.08425742],
       [0.08764709],
       [0.0911336 ],
       [0.09181153],
       [0.09016513],
       [0.09195681],
       [0.09534647],
       [0.09476539],
       [0.09147257],
       [0.09747712],
       [0.09834875],
       [0.10469227],
       [0.11161687],
       [0.09558859],
       [0.09931722],
       [0.11917098],
       [0.11442545],
       [0.11268219],
       [0.11510338],
       [0.10725873],
       [0.12309331],
       [0.11873517],
       [0.11708876],
       [0.1257082 ],
       [0.11892887],
       [0.12711249],
       [0.12943683],
       [0.15059803],
       [0.14822527],
       [0.14764418],
       [0.14159121],
       [0.15209917],
       [0.15035591],
       [0.14173648],
       [0.14449663],
       [0.14086485],
       [0.14246283],
       [0.12357755],
       [0.1265314 ],
       [0.12406179],
       [0.1236744 ],
       [0.12260907],
       [0.12362597],
       [0.12536923],
       [0.11781512],
       [0.12105951],
       [0.1216406 ],
       [0.12352913],
       [0.12081739],
       [0.11771827],
       [0.10871144],
       [0.11258535],
       [0.11912256],
       [0.11975207],
       [0.12362597],
       [0.11791197],
       [0.12890417],
       [0.12667667],
       [0.13490872],
       [0.13297177],
       [0.13205172],
       [0.13703937],
       [0.13573193],
       [0.13626459],
       [0.12938841],
       [0.1185899 ],
       [0.11975207],
       [0.1244976 ],
       [0.12464287],
       [0.12023631],
       [0.13055058],
       [0.12832308],
       [0.1265314 ],
       [0.12822624],
       [0.10764612],
       [0.1055639 ],
       [0.10662922],
       [0.10856617],
       [0.11674979],
       [0.12202799],
       [0.12575662],
       [0.1285652 ],
       [0.11263377],
       [0.11355382],
       [0.12193114],
       [0.13660355],
       [0.15142124],
       [0.15287395],
       [0.16609365],
       [0.1590722 ],
       [0.16357561],
       [0.16183236],
       [0.15587623],
       [0.15878166],
       [0.15732894],
       [0.16105758],
       [0.15645731],
       [0.14425452],
       [0.14856423],
       [0.1578616 ],
       [0.15548884],
       [0.15132439],
       [0.16725582],
       [0.17388988],
       [0.17258244],
       [0.16861169],
       [0.16347877],
       [0.16188078],
       [0.16798218],
       [0.17224347],
       [0.18401046],
       [0.17999128],
       [0.18493051],
       [0.1802334 ],
       [0.17829645],
       [0.17316353],
       [0.16565784],
       [0.16318822],
       [0.16633577],
       [0.17088761],
       [0.16376931],
       [0.1670137 ],
       [0.16691686],
       [0.17868384],
       [0.18265459],
       [0.19519636],
       [0.18706116],
       [0.16682001],
       [0.16696528],
       [0.16033122],
       [0.15272868],
       [0.15514987],
       [0.15892693],
       [0.16265556],
       [0.16943489],
       [0.1651736 ],
       [0.16372089],
       [0.16125127],
       [0.17282456],
       [0.16125127],
       [0.16759479],
       [0.17577841],
       [0.18279986],
       [0.18304198],
       [0.18202508],
       [0.19858602],
       [0.20923926],
       [0.20580117],
       [0.20391264],
       [0.20275047],
       [0.21650283],
       [0.21703549],
       [0.21994092],
       [0.21882718],
       [0.21776185],
       [0.20739916],
       [0.19127403],
       [0.1895792 ],
       [0.1966975 ],
       [0.19689119],
       [0.2095298 ],
       [0.20560748],
       [0.19558375],
       [0.19534163],
       [0.20352525],
       [0.20071667],
       [0.19219408],
       [0.22807612],
       [0.21839136],
       [0.18614111],
       [0.18439785],
       [0.18556002],
       [0.17795748],
       [0.16483463],
       [0.1710813 ],
       [0.18497894],
       [0.19069294],
       [0.19984504],
       [0.19926396],
       [0.20115249],
       [0.2068665 ],
       [0.201782  ],
       [0.19858602],
       [0.20551063],
       [0.21015931],
       [0.20449373],
       [0.2229432 ],
       [0.2217326 ],
       [0.2156312 ],
       [0.21238681],
       [0.20938453],
       [0.2109341 ],
       [0.20604329],
       [0.20304101],
       [0.19616483],
       [0.19713331],
       [0.18502736],
       [0.18342937],
       [0.17345407],
       [0.18536633],
       [0.18105661],
       [0.18507578],
       [0.18376834],
       [0.19815021],
       [0.19543848],
       [0.19732701],
       [0.2109341 ],
       [0.21146676],
       [0.21374268],
       [0.22231369],
       [0.23897148],
       [0.23863251],
       [0.23732507],
       [0.24894678],
       [0.24487918],
       [0.25785676],
       [0.25737252],
       [0.24526657],
       [0.25209433],
       [0.25461237],
       [0.25916421],
       [0.25945475],
       [0.25238487],
       [0.25282069],
       [0.2634255 ],
       [0.2664762 ],
       [0.26206963],
       [0.26269914],
       [0.26332865],
       [0.26085904],
       [0.2503995 ],
       [0.25800203],
       [0.2626023 ],
       [0.26236018],
       [0.25059319],
       [0.25998741],
       [0.25819573],
       [0.27635466],
       [0.28013171],
       [0.27306184],
       [0.27480509],
       [0.27068907],
       [0.27243233],
       [0.28952593],
       [0.28444143],
       [0.28439301],
       [0.28240763],
       [0.27180282],
       [0.26298969],
       [0.25969687],
       [0.25805046],
       [0.26279599],
       [0.26846158],
       [0.28841218],
       [0.29122076],
       [0.28608784],
       [0.28594257],
       [0.27834003],
       [0.28482882],
       [0.28264975],
       [0.26758995],
       [0.26976902],
       [0.27311026],
       [0.26807419],
       [0.27243233],
       [0.27504721],
       [0.28279502],
       [0.29083337],
       [0.28235921],
       [0.28482882],
       [0.27790422],
       [0.26788049],
       [0.25437025],
       [0.25248172],
       [0.2410537 ],
       [0.24221587],
       [0.24468549],
       [0.24168321],
       [0.24173164],
       [0.24463706],
       [0.24492761],
       [0.23669556],
       [0.23969784],
       [0.23621132],
       [0.23175633],
       [0.23369328],
       [0.22434749],
       [0.22337901],
       [0.2247833 ],
       [0.22909302],
       [0.23151421],
       [0.22797927],
       [0.24638032],
       [0.24759092],
       [0.28279502],
       [0.2764515 ],
       [0.27896954],
       [0.27625781],
       [0.27993802],
       [0.27272287],
       [0.27499879],
       [0.30066341],
       [0.2967895 ],
       [0.29959808],
       [0.29398092],
       [0.28720159],
       [0.28110019],
       [0.2807128 ],
       [0.27955063],
       [0.28056753],
       [0.28821849],
       [0.29533679],
       [0.29635369],
       [0.29756428],
       [0.29199555],
       [0.29180185],
       [0.29126919],
       [0.30792698],
       [0.31073556],
       [0.30632899],
       [0.30332672],
       [0.29640211],
       [0.29935596],
       [0.30850806],
       [0.32284151],
       [0.3321873 ],
       [0.33707811],
       [0.33412426],
       [0.33741707],
       [0.34187206],
       [0.3413394 ],
       [0.34656917],
       [0.34448695],
       [0.34623021],
       [0.35024938],
       [0.34685972],
       [0.34623021],
       [0.35005569],
       [0.35799719],
       [0.3433732 ],
       [0.34017723],
       [0.33518958],
       [0.34066147],
       [0.35257373],
       [0.34831243],
       [0.33920876],
       [0.35465595],
       [0.35896567],
       [0.35504334],
       [0.34690814],
       [0.36235533],
       [0.386083  ],
       [0.4013365 ],
       [0.39092538],
       [0.39048956],
       [0.38235437],
       [0.40666312],
       [0.41489516],
       [0.41111811],
       [0.41068229],
       [0.40734105],
       [0.40477459],
       [0.4184301 ],
       [0.42380514],
       [0.42162607],
       [0.41528255],
       [0.4235146 ],
       [0.42288509],
       [0.43440996],
       [0.43072975],
       [0.4355237 ],
       [0.43591109],
       [0.43891337],
       [0.43983342],
       [0.44545058],
       [0.46661179],
       [0.46327054],
       [0.46128517],
       [0.46288315],
       [0.45683018],
       [0.46021984],
       [0.47096993],
       [0.47019515],
       [0.43891337],
       [0.42496731],
       [0.43852598],
       [0.43290882],
       [0.43445838],
       [0.4355237 ],
       [0.44482107],
       [0.44293255],
       [0.44438526],
       [0.47460171],
       [0.48017045],
       [0.47387536],
       [0.46452956],
       [0.47406905],
       [0.49319646],
       [0.49784514],
       [0.49498814],
       [0.49198586],
       [0.4871919 ],
       [0.49392281],
       [0.50171905],
       [0.49949155],
       [0.51145223],
       [0.50835311],
       [0.50113796],
       [0.48646555],
       [0.48975837],
       [0.48874147],
       [0.49155005],
       [0.49140477],
       [0.47571546],
       [0.4749891 ],
       [0.48612658],
       [0.4843349 ],
       [0.47406905],
       [0.46859716],
       [0.46167256],
       [0.47000145],
       [0.48423805],
       [0.48360854],
       [0.49382596],
       [0.49416493],
       [0.49382596],
       [0.4904363 ],
       [0.48472229],
       [0.48704663],
       [0.49193744],
       [0.4782335 ],
       [0.46423902],
       [0.47101835],
       [0.45963876],
       [0.4519878 ],
       [0.47121205],
       [0.47987991],
       [0.48293061],
       [0.47247107],
       [0.46559489],
       [0.46922667],
       [0.4780398 ],
       [0.48748245],
       [0.49290591],
       [0.48540022],
       [0.49794199],
       [0.49319646],
       [0.503656  ],
       [0.50404339],
       [0.51648831],
       [0.51435766],
       [0.50588349],
       [0.51111326],
       [0.51518086],
       [0.54176553],
       [0.55101448],
       [0.54970704],
       [0.55295143],
       [0.55251562],
       [0.5522735 ],
       [0.55174084],
       [0.56350782],
       [0.54714057],
       [0.55483996],
       [0.53314609],
       [0.5329524 ],
       [0.53338821],
       [0.56045712],
       [0.54433199],
       [0.54975546],
       [0.56307201],
       [0.55609898],
       [0.48782141],
       [0.47934725],
       [0.45358578],
       [0.44859813],
       [0.44898552],
       [0.44472423],
       [0.45518377],
       [0.46709602],
       [0.49503656],
       [0.49101738],
       [0.48278534],
       [0.47159944],
       [0.43281197],
       [0.43523316],
       [0.4253547 ],
       [0.42273982],
       [0.46143044],
       [0.46380321],
       [0.45566801],
       [0.45552273],
       [0.45900925],
       [0.43968815],
       [0.4446758 ],
       [0.43256985],
       [0.42017336],
       [0.39150646],
       [0.40806741],
       [0.41349087],
       [0.40588833],
       [0.39712363],
       [0.38656724],
       [0.36191952],
       [0.37959421],
       [0.3689894 ],
       [0.36724614],
       [0.3537359 ],
       [0.36908624],
       [0.37644666],
       [0.35790034],
       [0.36530919],
       [0.36622924],
       [0.34777977],
       [0.36768195],
       [0.3728633 ],
       [0.37305699],
       [0.38772941],
       [0.39354026],
       [0.38467871],
       [0.38153116],
       [0.37872258],
       [0.38206382],
       [0.38753571],
       [0.40598518],
       [0.39223282],
       [0.39160331],
       [0.38438817],
       [0.37271803],
       [0.36579342],
       [0.37605927],
       [0.38095008],
       [0.36191952],
       [0.37218537],
       [0.3679725 ],
       [0.38216067],
       [0.38235437],
       [0.38845576],
       [0.38003002],
       [0.39252336],
       [0.39586461],
       [0.40937485],
       [0.4074379 ],
       [0.39581618],
       [0.37785095],
       [0.3637112 ],
       [0.36400174],
       [0.32724808],
       [0.33610963],
       [0.35431698],
       [0.36245218],
       [0.36211321],
       [0.36487337],
       [0.36230691],
       [0.34768292],
       [0.35407486],
       [0.35891724],
       [0.36642293],
       [0.38739044],
       [0.38559876],
       [0.37518764],
       [0.36651978],
       [0.35460753],
       [0.35644763],
       [0.35310639],
       [0.34608494],
       [0.3423563 ],
       [0.35053993],
       [0.3456007 ],
       [0.34777977],
       [0.34589124],
       [0.33702968],
       [0.33112198],
       [0.33577066],
       [0.35494649],
       [0.34826401],
       [0.35363905],
       [0.35596339],
       [0.34104886],
       [0.34167837],
       [0.34429325],
       [0.35567285],
       [0.34332478],
       [0.34376059],
       [0.34555227],
       [0.30792698],
       [0.32071086],
       [0.29378723],
       [0.28899327],
       [0.29257663],
       [0.30555421],
       [0.3150937 ],
       [0.32269624],
       [0.32308363],
       [0.33712653],
       [0.34017723],
       [0.35010411],
       [0.34216261],
       [0.34201734],
       [0.34259842],
       [0.34095201],
       [0.36032153],
       [0.36443756],
       [0.3716527 ],
       [0.36240376],
       [0.35625393],
       [0.36564815],
       [0.35518861],
       [0.36729456],
       [0.38550191],
       [0.36889255],
       [0.35848143],
       [0.36230691],
       [0.36773038],
       [0.37576873],
       [0.38032057],
       [0.39412135],
       [0.38768098],
       [0.38656724],
       [0.37935209],
       [0.38017529],
       [0.38651881],
       [0.40448404],
       [0.39915743],
       [0.37862573],
       [0.39140962],
       [0.40094911],
       [0.40123965],
       [0.39814053],
       [0.40588833],
       [0.40985909],
       [0.39247494],
       [0.38603457],
       [0.39484771],
       [0.39567091],
       [0.36705244],
       [0.37010314],
       [0.36642293],
       [0.36855358],
       [0.3565929 ],
       [0.34715026],
       [0.35925621],
       [0.36966733],
       [0.34371217],
       [0.36182267],
       [0.35799719],
       [0.36099947],
       [0.34216261],
       [0.34114571],
       [0.33276839],
       [0.33388214],
       [0.3374655 ],
       [0.33431795],
       [0.33872452],
       [0.36051523],
       [0.35228318],
       [0.33998354],
       [0.34409956],
       [0.3606605 ],
       [0.36497022],
       [0.36380805],
       [0.36714929],
       [0.36022469],
       [0.36952206],
       [0.37557503],
       [0.37538134],
       [0.37509079],
       [0.36099947],
       [0.36884412],
       [0.35073362],
       [0.34012881],
       [0.32792601],
       [0.31935499],
       [0.31063871],
       [0.31247881],
       [0.32163091],
       [0.31853179],
       [0.3252627 ],
       [0.32632802],
       [0.30405307],
       [0.29993705],
       [0.29688635],
       [0.29122076],
       [0.29078495],
       [0.3089923 ],
       [0.30109922],
       [0.30221297],
       [0.31155876],
       [0.31538424],
       [0.31519055],
       [0.31194615],
       [0.30870176],
       [0.30318144],
       [0.30245509],
       [0.2967895 ],
       [0.30250351],
       [0.31039659],
       [0.30429519],
       [0.28327926],
       [0.27088277],
       [0.27732313],
       [0.2756283 ],
       [0.28085807],
       [0.26923636],
       [0.27015641],
       [0.26473294],
       [0.26085904],
       [0.26516876],
       [0.26449082],
       [0.2626023 ],
       [0.26066534],
       [0.26168224],
       [0.24739722],
       [0.25417655],
       [0.25127112],
       [0.24337804],
       [0.23659871],
       [0.23572708],
       [0.25785676],
       [0.26594354],
       [0.26347392],
       [0.26579827],
       [0.25509661],
       [0.27412716],
       [0.27722628],
       [0.25974529],
       [0.27025326],
       [0.27756525],
       [0.27315868],
       [0.27228706],
       [0.25771149],
       [0.25606508],
       [0.26129485],
       [0.28013171],
       [0.28720159],
       [0.29112392],
       [0.2939325 ],
       [0.29325456],
       [0.29490097],
       [0.29049441],
       [0.29615999],
       [0.31005762],
       [0.30744274],
       [0.30928284],
       [0.29703162],
       [0.30937969],
       [0.31262409],
       [0.30240666],
       [0.30647426],
       [0.30429519],
       [0.31310832],
       [0.31465789],
       [0.30235824],
       [0.30681323],
       [0.30439204],
       [0.3079754 ],
       [0.30463416],
       [0.29601472],
       [0.30942811],
       [0.32284151],
       [0.31935499],
       [0.32351944],
       [0.31882233],
       [0.33606121],
       [0.33673914],
       [0.33940245],
       [0.33204203],
       [0.32521428],
       [0.32192146],
       [0.33959615],
       [0.34022565],
       [0.33073459],
       [0.32342259],
       [0.31228512],
       [0.31373783],
       [0.29979178],
       [0.33281681],
       [0.34390586],
       [0.3423563 ],
       [0.33533485],
       [0.32366471],
       [0.31891918],
       [0.31199458],
       [0.30758801],
       [0.27693574],
       [0.26173067],
       [0.25993899],
       [0.25359547],
       [0.25945475],
       [0.25974529],
       [0.27291657],
       [0.26357077],
       [0.26376447],
       [0.25621035],
       [0.27325553],
       [0.30032444],
       [0.29630526],
       [0.2827466 ]])
In [47]:
import numpy
# convert an array of values into a dataset matrix
def create_dataset(dataset, time_step=1):
	dataX, dataY = [], []
	for i in range(len(dataset)-time_step-1):
		a = dataset[i:(i+time_step), 0]   ###i=0, 0,1,2,3-----99   100 
		dataX.append(a)
		dataY.append(dataset[i + time_step, 0])
	return numpy.array(dataX), numpy.array(dataY)
In [48]:
# reshape into X=t,t+1,t+2,t+3 and Y=t+4
time_step = 100
X_train, y_train = create_dataset(train_data, time_step)
X_test, ytest = create_dataset(test_data, time_step)
In [49]:
print(X_test.shape), print(ytest.shape)
(419, 100)
(419,)
Out[49]:
(None, None)
In [50]:
# reshape input to be [samples, time steps, features] which is required for LSTM
X_train =X_train.reshape(X_train.shape[0],X_train.shape[1] , 1)
X_test = X_test.reshape(X_test.shape[0],X_test.shape[1] , 1)
In [51]:
### Create the Stacked LSTM model
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import LSTM
In [52]:
model=Sequential()
model.add(LSTM(50,return_sequences=True,input_shape=(100,1)))
model.add(LSTM(50,return_sequences=True))
model.add(LSTM(50))
model.add(Dense(1))
model.compile(loss='mean_squared_error',optimizer='adam')
In [58]:
model.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 lstm (LSTM)                 (None, 100, 50)           10400     
                                                                 
 lstm_1 (LSTM)               (None, 100, 50)           20200     
                                                                 
 lstm_2 (LSTM)               (None, 50)                20200     
                                                                 
 dense (Dense)               (None, 1)                 51        
                                                                 
=================================================================
Total params: 50,851
Trainable params: 50,851
Non-trainable params: 0
_________________________________________________________________
In [59]:
model.fit(X_train,y_train,validation_data=(X_test,ytest),epochs=100,batch_size=64,verbose=1)
Epoch 1/100
14/14 [==============================] - 7s 189ms/step - loss: 0.0216 - val_loss: 0.0306
Epoch 2/100
14/14 [==============================] - 2s 130ms/step - loss: 0.0039 - val_loss: 0.0121
Epoch 3/100
14/14 [==============================] - 2s 131ms/step - loss: 0.0017 - val_loss: 0.0052
Epoch 4/100
14/14 [==============================] - 2s 132ms/step - loss: 7.9742e-04 - val_loss: 0.0036
Epoch 5/100
14/14 [==============================] - 2s 117ms/step - loss: 7.2694e-04 - val_loss: 0.0039
Epoch 6/100
14/14 [==============================] - 2s 122ms/step - loss: 7.0373e-04 - val_loss: 0.0043
Epoch 7/100
14/14 [==============================] - 2s 121ms/step - loss: 6.7829e-04 - val_loss: 0.0043
Epoch 8/100
14/14 [==============================] - 2s 116ms/step - loss: 6.9737e-04 - val_loss: 0.0050
Epoch 9/100
14/14 [==============================] - 2s 127ms/step - loss: 6.7471e-04 - val_loss: 0.0042
Epoch 10/100
14/14 [==============================] - 2s 122ms/step - loss: 6.6030e-04 - val_loss: 0.0039
Epoch 11/100
14/14 [==============================] - 2s 132ms/step - loss: 6.3781e-04 - val_loss: 0.0042
Epoch 12/100
14/14 [==============================] - 2s 163ms/step - loss: 6.2818e-04 - val_loss: 0.0037
Epoch 13/100
14/14 [==============================] - 2s 142ms/step - loss: 6.1883e-04 - val_loss: 0.0034
Epoch 14/100
14/14 [==============================] - 2s 125ms/step - loss: 6.1132e-04 - val_loss: 0.0036
Epoch 15/100
14/14 [==============================] - 2s 120ms/step - loss: 5.9963e-04 - val_loss: 0.0034
Epoch 16/100
14/14 [==============================] - 2s 116ms/step - loss: 6.0136e-04 - val_loss: 0.0037
Epoch 17/100
14/14 [==============================] - 2s 117ms/step - loss: 6.3481e-04 - val_loss: 0.0032
Epoch 18/100
14/14 [==============================] - 2s 117ms/step - loss: 5.7008e-04 - val_loss: 0.0031
Epoch 19/100
14/14 [==============================] - 2s 117ms/step - loss: 5.7846e-04 - val_loss: 0.0033
Epoch 20/100
14/14 [==============================] - 2s 115ms/step - loss: 5.8275e-04 - val_loss: 0.0029
Epoch 21/100
14/14 [==============================] - 2s 138ms/step - loss: 5.2693e-04 - val_loss: 0.0025
Epoch 22/100
14/14 [==============================] - 2s 138ms/step - loss: 5.1990e-04 - val_loss: 0.0028
Epoch 23/100
14/14 [==============================] - 2s 155ms/step - loss: 5.0517e-04 - val_loss: 0.0026
Epoch 24/100
14/14 [==============================] - 2s 131ms/step - loss: 5.0354e-04 - val_loss: 0.0029
Epoch 25/100
14/14 [==============================] - 2s 125ms/step - loss: 5.0409e-04 - val_loss: 0.0023
Epoch 26/100
14/14 [==============================] - 2s 139ms/step - loss: 4.9685e-04 - val_loss: 0.0027
Epoch 27/100
14/14 [==============================] - 2s 132ms/step - loss: 5.3307e-04 - val_loss: 0.0024
Epoch 28/100
14/14 [==============================] - 2s 134ms/step - loss: 4.8449e-04 - val_loss: 0.0019
Epoch 29/100
14/14 [==============================] - 2s 125ms/step - loss: 4.7568e-04 - val_loss: 0.0020
Epoch 30/100
14/14 [==============================] - 2s 141ms/step - loss: 4.7949e-04 - val_loss: 0.0022
Epoch 31/100
14/14 [==============================] - 2s 133ms/step - loss: 4.5560e-04 - val_loss: 0.0017
Epoch 32/100
14/14 [==============================] - 2s 119ms/step - loss: 4.7166e-04 - val_loss: 0.0015
Epoch 33/100
14/14 [==============================] - 2s 118ms/step - loss: 4.8998e-04 - val_loss: 0.0016
Epoch 34/100
14/14 [==============================] - 2s 118ms/step - loss: 4.5278e-04 - val_loss: 0.0029
Epoch 35/100
14/14 [==============================] - 2s 134ms/step - loss: 4.6314e-04 - val_loss: 0.0027
Epoch 36/100
14/14 [==============================] - 2s 140ms/step - loss: 4.9327e-04 - val_loss: 0.0021
Epoch 37/100
14/14 [==============================] - 2s 117ms/step - loss: 4.3201e-04 - val_loss: 0.0018
Epoch 38/100
14/14 [==============================] - 2s 150ms/step - loss: 3.9871e-04 - val_loss: 0.0020
Epoch 39/100
14/14 [==============================] - 2s 137ms/step - loss: 4.1456e-04 - val_loss: 0.0020
Epoch 40/100
14/14 [==============================] - 2s 126ms/step - loss: 4.5849e-04 - val_loss: 0.0021
Epoch 41/100
14/14 [==============================] - 2s 116ms/step - loss: 4.3615e-04 - val_loss: 0.0017
Epoch 42/100
14/14 [==============================] - 2s 116ms/step - loss: 3.8359e-04 - val_loss: 0.0015
Epoch 43/100
14/14 [==============================] - 2s 116ms/step - loss: 3.5947e-04 - val_loss: 0.0015
Epoch 44/100
14/14 [==============================] - 2s 119ms/step - loss: 3.5385e-04 - val_loss: 0.0016
Epoch 45/100
14/14 [==============================] - 2s 118ms/step - loss: 3.4993e-04 - val_loss: 0.0016
Epoch 46/100
14/14 [==============================] - 2s 120ms/step - loss: 3.6205e-04 - val_loss: 0.0014
Epoch 47/100
14/14 [==============================] - 2s 117ms/step - loss: 3.2851e-04 - val_loss: 0.0013
Epoch 48/100
14/14 [==============================] - 2s 129ms/step - loss: 3.2961e-04 - val_loss: 0.0015
Epoch 49/100
14/14 [==============================] - 2s 138ms/step - loss: 3.2473e-04 - val_loss: 0.0012
Epoch 50/100
14/14 [==============================] - 2s 122ms/step - loss: 3.2092e-04 - val_loss: 0.0014
Epoch 51/100
14/14 [==============================] - 2s 132ms/step - loss: 3.1836e-04 - val_loss: 0.0016
Epoch 52/100
14/14 [==============================] - 2s 133ms/step - loss: 3.1662e-04 - val_loss: 9.9691e-04
Epoch 53/100
14/14 [==============================] - 2s 135ms/step - loss: 3.5311e-04 - val_loss: 0.0011
Epoch 54/100
14/14 [==============================] - 2s 128ms/step - loss: 3.0084e-04 - val_loss: 0.0011
Epoch 55/100
14/14 [==============================] - 2s 126ms/step - loss: 2.9835e-04 - val_loss: 0.0014
Epoch 56/100
14/14 [==============================] - 2s 125ms/step - loss: 3.0461e-04 - val_loss: 0.0012
Epoch 57/100
14/14 [==============================] - 2s 135ms/step - loss: 3.0464e-04 - val_loss: 9.2226e-04
Epoch 58/100
14/14 [==============================] - 2s 138ms/step - loss: 3.3423e-04 - val_loss: 0.0012
Epoch 59/100
14/14 [==============================] - 2s 130ms/step - loss: 2.9653e-04 - val_loss: 0.0014
Epoch 60/100
14/14 [==============================] - 2s 124ms/step - loss: 3.2118e-04 - val_loss: 0.0013
Epoch 61/100
14/14 [==============================] - 2s 129ms/step - loss: 2.8639e-04 - val_loss: 0.0012
Epoch 62/100
14/14 [==============================] - 2s 118ms/step - loss: 2.7868e-04 - val_loss: 0.0010
Epoch 63/100
14/14 [==============================] - 2s 133ms/step - loss: 3.0204e-04 - val_loss: 0.0012
Epoch 64/100
14/14 [==============================] - 2s 143ms/step - loss: 3.1422e-04 - val_loss: 0.0015
Epoch 65/100
14/14 [==============================] - 2s 128ms/step - loss: 3.4381e-04 - val_loss: 0.0013
Epoch 66/100
14/14 [==============================] - 2s 154ms/step - loss: 2.7934e-04 - val_loss: 8.5070e-04
Epoch 67/100
14/14 [==============================] - 2s 142ms/step - loss: 2.8754e-04 - val_loss: 9.2565e-04
Epoch 68/100
14/14 [==============================] - 2s 125ms/step - loss: 2.7482e-04 - val_loss: 0.0012
Epoch 69/100
14/14 [==============================] - 2s 137ms/step - loss: 2.8165e-04 - val_loss: 0.0012
Epoch 70/100
14/14 [==============================] - 2s 130ms/step - loss: 2.8345e-04 - val_loss: 0.0016
Epoch 71/100
14/14 [==============================] - 2s 126ms/step - loss: 2.9024e-04 - val_loss: 7.6359e-04
Epoch 72/100
14/14 [==============================] - 2s 126ms/step - loss: 2.6435e-04 - val_loss: 9.8066e-04
Epoch 73/100
14/14 [==============================] - 2s 127ms/step - loss: 2.5267e-04 - val_loss: 7.1246e-04
Epoch 74/100
14/14 [==============================] - 2s 144ms/step - loss: 2.8395e-04 - val_loss: 7.7438e-04
Epoch 75/100
14/14 [==============================] - 2s 137ms/step - loss: 2.6123e-04 - val_loss: 8.8154e-04
Epoch 76/100
14/14 [==============================] - 2s 130ms/step - loss: 2.4242e-04 - val_loss: 8.9714e-04
Epoch 77/100
14/14 [==============================] - 2s 119ms/step - loss: 2.4386e-04 - val_loss: 0.0011
Epoch 78/100
14/14 [==============================] - 2s 119ms/step - loss: 2.4902e-04 - val_loss: 8.1598e-04
Epoch 79/100
14/14 [==============================] - 2s 119ms/step - loss: 2.4013e-04 - val_loss: 6.7061e-04
Epoch 80/100
14/14 [==============================] - 2s 119ms/step - loss: 3.1375e-04 - val_loss: 0.0012
Epoch 81/100
14/14 [==============================] - 2s 120ms/step - loss: 3.0737e-04 - val_loss: 0.0014
Epoch 82/100
14/14 [==============================] - 2s 119ms/step - loss: 2.5192e-04 - val_loss: 0.0011
Epoch 83/100
14/14 [==============================] - 2s 137ms/step - loss: 2.4572e-04 - val_loss: 0.0010
Epoch 84/100
14/14 [==============================] - 2s 155ms/step - loss: 2.5816e-04 - val_loss: 7.1704e-04
Epoch 85/100
14/14 [==============================] - 2s 138ms/step - loss: 2.3946e-04 - val_loss: 9.8093e-04
Epoch 86/100
14/14 [==============================] - 2s 133ms/step - loss: 2.3071e-04 - val_loss: 6.9482e-04
Epoch 87/100
14/14 [==============================] - 2s 129ms/step - loss: 2.5062e-04 - val_loss: 6.5913e-04
Epoch 88/100
14/14 [==============================] - 2s 123ms/step - loss: 2.4183e-04 - val_loss: 6.9999e-04
Epoch 89/100
14/14 [==============================] - 2s 121ms/step - loss: 2.4447e-04 - val_loss: 0.0013
Epoch 90/100
14/14 [==============================] - 2s 121ms/step - loss: 2.3850e-04 - val_loss: 7.0406e-04
Epoch 91/100
14/14 [==============================] - 2s 124ms/step - loss: 2.4112e-04 - val_loss: 0.0011
Epoch 92/100
14/14 [==============================] - 2s 133ms/step - loss: 2.3137e-04 - val_loss: 7.6536e-04
Epoch 93/100
14/14 [==============================] - 2s 135ms/step - loss: 2.6959e-04 - val_loss: 6.2377e-04
Epoch 94/100
14/14 [==============================] - 2s 140ms/step - loss: 2.3121e-04 - val_loss: 5.7497e-04
Epoch 95/100
14/14 [==============================] - 2s 128ms/step - loss: 2.3352e-04 - val_loss: 7.8791e-04
Epoch 96/100
14/14 [==============================] - 2s 128ms/step - loss: 2.1362e-04 - val_loss: 7.8944e-04
Epoch 97/100
14/14 [==============================] - 2s 121ms/step - loss: 2.1115e-04 - val_loss: 0.0010
Epoch 98/100
14/14 [==============================] - 2s 122ms/step - loss: 2.2986e-04 - val_loss: 5.8679e-04
Epoch 99/100
14/14 [==============================] - 2s 122ms/step - loss: 2.0538e-04 - val_loss: 7.3830e-04
Epoch 100/100
14/14 [==============================] - 2s 132ms/step - loss: 2.0567e-04 - val_loss: 6.0433e-04
Out[59]:
<keras.callbacks.History at 0x1d857546d90>
In [60]:
import tensorflow as tf
In [61]:
tf.__version__
Out[61]:
'2.12.0'
In [62]:
### Lets Do the prediction and check performance metrics
train_predict=model.predict(X_train)
test_predict=model.predict(X_test)
27/27 [==============================] - 2s 29ms/step
14/14 [==============================] - 1s 27ms/step
In [63]:
##Transformback to original form
train_predict=scaler.inverse_transform(train_predict)
test_predict=scaler.inverse_transform(test_predict)
In [64]:
### Calculate RMSE performance metrics
import math
from sklearn.metrics import mean_squared_error
math.sqrt(mean_squared_error(y_train,train_predict))
Out[64]:
0.014140821522976035
In [69]:
### Test Data RMSE
math.sqrt(mean_squared_error(ytest,test_predict))
Out[69]:
0.02458303470611413
In [105]:
### Plotting 
# shift train predictions for plotting
look_back=100
trainPredictPlot = numpy.empty_like(df1)
trainPredictPlot[:, :] = np.nan
trainPredictPlot[look_back:len(train_predict)+look_back, :] = train_predict
# shift test predictions for plotting
testPredictPlot = numpy.empty_like(df1)
testPredictPlot[:, :] = numpy.nan
testPredictPlot[len(train_predict)+(look_back*2)+1:len(df1)-1, :] = test_predict
# plot baseline and predictions
plt.plot(scaler.inverse_transform(df1),color='red')
plt.plot(trainPredictPlot,color='blue')
plt.plot(testPredictPlot,color='yellow')
plt.xlabel('Days',fontsize=18)
plt.ylabel('Price In rupees',fontsize=20)
plt.legend(["Original Price","train price","test price"],loc="upper left")
Out[105]:
<matplotlib.legend.Legend at 0x1d8670c30a0>
In [76]:
len(test_data)
Out[76]:
520
In [77]:
x_input=test_data[420:].reshape(1,-1)
x_input.shape
Out[77]:
(1, 100)
In [78]:
temp_input=list(x_input)
temp_input=temp_input[0].tolist()
In [79]:
temp_input
Out[79]:
[0.46854873855987594,
 0.4857391893855018,
 0.48249479444094706,
 0.473487966684422,
 0.4776039901215437,
 0.4639969008764709,
 0.4556680063919422,
 0.48157474214323753,
 0.4797346375478185,
 0.48084838506609845,
 0.45450583506851955,
 0.4670476006004551,
 0.5115006537213693,
 0.5116459251367971,
 0.5244782334995883,
 0.49726405500944254,
 0.5121785869933659,
 0.5170209675076267,
 0.5572127257759913,
 0.5391990702629412,
 0.5151808629122074,
 0.5191031911287589,
 0.48932255096605487,
 0.5063677303762528,
 0.5021548593288461,
 0.5178441721950509,
 0.5062224589608251,
 0.4871919035397801,
 0.48772456539634884,
 0.5148903200813519,
 0.5250593191612996,
 0.5225897050990267,
 0.48622342743692787,
 0.505108711442545,
 0.5474795409423272,
 0.5795360999467336,
 0.5913515084015302,
 0.6406469420367051,
 0.6450535083046824,
 0.6406469420367051,
 0.6521718076606458,
 0.6937194324730036,
 0.6758510483753812,
 0.6295094668539052,
 0.656239407292625,
 0.6745436056365308,
 0.6665536777880006,
 0.6639872161154423,
 0.6711055154714056,
 0.6971575226381288,
 0.7010314270495375,
 0.7157522638128904,
 0.7513437605927071,
 0.7454360563653091,
 0.8040772843930073,
 0.80635320323471,
 0.8359885719819862,
 0.8355043339305601,
 0.8402498668345357,
 0.870853711684664,
 0.9500750568979708,
 0.9831969396155149,
 1.0,
 0.9464916953174181,
 0.9014091327296496,
 0.9193743644375574,
 0.9473149000048422,
 0.9325940632414893,
 0.8750181589269284,
 0.8824270011137474,
 0.8624279695898502,
 0.8750665827320712,
 0.8527432085613285,
 0.8670282310783979,
 0.8739528352137911,
 0.8951624618662535,
 0.870611592658951,
 0.8931286620502636,
 0.8913369812599874,
 0.8895937242748535,
 0.8977289235388115,
 0.8804900489080432,
 0.8711926783206625,
 0.8844123771245942,
 0.9275095637015154,
 0.9331267250980579,
 0.9424240956854388,
 0.911142317563314,
 0.8738559876035059,
 0.8618953077332816,
 0.8756476683937822,
 0.8834439010217421,
 0.870127354607525,
 0.8831533581908866,
 0.9332719965134859,
 0.9517214662728195,
 0.9404871434797346,
 0.9350636773037624,
 0.9316740109437798,
 0.9051377657256305]
In [80]:
# demonstrate prediction for next 10 days
from numpy import array

lst_output=[]
n_steps=100
i=0
while(i<30):
    
    if(len(temp_input)>100):
        #print(temp_input)
        x_input=np.array(temp_input[1:])
        print("{} day input {}".format(i,x_input))
        x_input=x_input.reshape(1,-1)
        x_input = x_input.reshape((1, n_steps, 1))
        #print(x_input)
        yhat = model.predict(x_input, verbose=0)
        print("{} day output {}".format(i,yhat))
        temp_input.extend(yhat[0].tolist())
        temp_input=temp_input[1:]
        #print(temp_input)
        lst_output.extend(yhat.tolist())
        i=i+1
    else:
        x_input = x_input.reshape((1, n_steps,1))
        yhat = model.predict(x_input, verbose=0)
        print(yhat[0])
        temp_input.extend(yhat[0].tolist())
        print(len(temp_input))
        lst_output.extend(yhat.tolist())
        i=i+1
    

print(lst_output)
[0.8919715]
101
1 day input [0.48573919 0.48249479 0.47348797 0.47760399 0.4639969  0.45566801
 0.48157474 0.47973464 0.48084839 0.45450584 0.4670476  0.51150065
 0.51164593 0.52447823 0.49726406 0.51217859 0.51702097 0.55721273
 0.53919907 0.51518086 0.51910319 0.48932255 0.50636773 0.50215486
 0.51784417 0.50622246 0.4871919  0.48772457 0.51489032 0.52505932
 0.52258971 0.48622343 0.50510871 0.54747954 0.5795361  0.59135151
 0.64064694 0.64505351 0.64064694 0.65217181 0.69371943 0.67585105
 0.62950947 0.65623941 0.67454361 0.66655368 0.66398722 0.67110552
 0.69715752 0.70103143 0.71575226 0.75134376 0.74543606 0.80407728
 0.8063532  0.83598857 0.83550433 0.84024987 0.87085371 0.95007506
 0.98319694 1.         0.9464917  0.90140913 0.91937436 0.9473149
 0.93259406 0.87501816 0.882427   0.86242797 0.87506658 0.85274321
 0.86702823 0.87395284 0.89516246 0.87061159 0.89312866 0.89133698
 0.88959372 0.89772892 0.88049005 0.87119268 0.88441238 0.92750956
 0.93312673 0.9424241  0.91114232 0.87385599 0.86189531 0.87564767
 0.8834439  0.87012735 0.88315336 0.933272   0.95172147 0.94048714
 0.93506368 0.93167401 0.90513777 0.89197153]
1 day output [[0.87675655]]
2 day input [0.48249479 0.47348797 0.47760399 0.4639969  0.45566801 0.48157474
 0.47973464 0.48084839 0.45450584 0.4670476  0.51150065 0.51164593
 0.52447823 0.49726406 0.51217859 0.51702097 0.55721273 0.53919907
 0.51518086 0.51910319 0.48932255 0.50636773 0.50215486 0.51784417
 0.50622246 0.4871919  0.48772457 0.51489032 0.52505932 0.52258971
 0.48622343 0.50510871 0.54747954 0.5795361  0.59135151 0.64064694
 0.64505351 0.64064694 0.65217181 0.69371943 0.67585105 0.62950947
 0.65623941 0.67454361 0.66655368 0.66398722 0.67110552 0.69715752
 0.70103143 0.71575226 0.75134376 0.74543606 0.80407728 0.8063532
 0.83598857 0.83550433 0.84024987 0.87085371 0.95007506 0.98319694
 1.         0.9464917  0.90140913 0.91937436 0.9473149  0.93259406
 0.87501816 0.882427   0.86242797 0.87506658 0.85274321 0.86702823
 0.87395284 0.89516246 0.87061159 0.89312866 0.89133698 0.88959372
 0.89772892 0.88049005 0.87119268 0.88441238 0.92750956 0.93312673
 0.9424241  0.91114232 0.87385599 0.86189531 0.87564767 0.8834439
 0.87012735 0.88315336 0.933272   0.95172147 0.94048714 0.93506368
 0.93167401 0.90513777 0.89197153 0.87675655]
2 day output [[0.86094797]]
3 day input [0.47348797 0.47760399 0.4639969  0.45566801 0.48157474 0.47973464
 0.48084839 0.45450584 0.4670476  0.51150065 0.51164593 0.52447823
 0.49726406 0.51217859 0.51702097 0.55721273 0.53919907 0.51518086
 0.51910319 0.48932255 0.50636773 0.50215486 0.51784417 0.50622246
 0.4871919  0.48772457 0.51489032 0.52505932 0.52258971 0.48622343
 0.50510871 0.54747954 0.5795361  0.59135151 0.64064694 0.64505351
 0.64064694 0.65217181 0.69371943 0.67585105 0.62950947 0.65623941
 0.67454361 0.66655368 0.66398722 0.67110552 0.69715752 0.70103143
 0.71575226 0.75134376 0.74543606 0.80407728 0.8063532  0.83598857
 0.83550433 0.84024987 0.87085371 0.95007506 0.98319694 1.
 0.9464917  0.90140913 0.91937436 0.9473149  0.93259406 0.87501816
 0.882427   0.86242797 0.87506658 0.85274321 0.86702823 0.87395284
 0.89516246 0.87061159 0.89312866 0.89133698 0.88959372 0.89772892
 0.88049005 0.87119268 0.88441238 0.92750956 0.93312673 0.9424241
 0.91114232 0.87385599 0.86189531 0.87564767 0.8834439  0.87012735
 0.88315336 0.933272   0.95172147 0.94048714 0.93506368 0.93167401
 0.90513777 0.89197153 0.87675655 0.86094797]
3 day output [[0.84559965]]
4 day input [0.47760399 0.4639969  0.45566801 0.48157474 0.47973464 0.48084839
 0.45450584 0.4670476  0.51150065 0.51164593 0.52447823 0.49726406
 0.51217859 0.51702097 0.55721273 0.53919907 0.51518086 0.51910319
 0.48932255 0.50636773 0.50215486 0.51784417 0.50622246 0.4871919
 0.48772457 0.51489032 0.52505932 0.52258971 0.48622343 0.50510871
 0.54747954 0.5795361  0.59135151 0.64064694 0.64505351 0.64064694
 0.65217181 0.69371943 0.67585105 0.62950947 0.65623941 0.67454361
 0.66655368 0.66398722 0.67110552 0.69715752 0.70103143 0.71575226
 0.75134376 0.74543606 0.80407728 0.8063532  0.83598857 0.83550433
 0.84024987 0.87085371 0.95007506 0.98319694 1.         0.9464917
 0.90140913 0.91937436 0.9473149  0.93259406 0.87501816 0.882427
 0.86242797 0.87506658 0.85274321 0.86702823 0.87395284 0.89516246
 0.87061159 0.89312866 0.89133698 0.88959372 0.89772892 0.88049005
 0.87119268 0.88441238 0.92750956 0.93312673 0.9424241  0.91114232
 0.87385599 0.86189531 0.87564767 0.8834439  0.87012735 0.88315336
 0.933272   0.95172147 0.94048714 0.93506368 0.93167401 0.90513777
 0.89197153 0.87675655 0.86094797 0.84559965]
4 day output [[0.8310815]]
5 day input [0.4639969  0.45566801 0.48157474 0.47973464 0.48084839 0.45450584
 0.4670476  0.51150065 0.51164593 0.52447823 0.49726406 0.51217859
 0.51702097 0.55721273 0.53919907 0.51518086 0.51910319 0.48932255
 0.50636773 0.50215486 0.51784417 0.50622246 0.4871919  0.48772457
 0.51489032 0.52505932 0.52258971 0.48622343 0.50510871 0.54747954
 0.5795361  0.59135151 0.64064694 0.64505351 0.64064694 0.65217181
 0.69371943 0.67585105 0.62950947 0.65623941 0.67454361 0.66655368
 0.66398722 0.67110552 0.69715752 0.70103143 0.71575226 0.75134376
 0.74543606 0.80407728 0.8063532  0.83598857 0.83550433 0.84024987
 0.87085371 0.95007506 0.98319694 1.         0.9464917  0.90140913
 0.91937436 0.9473149  0.93259406 0.87501816 0.882427   0.86242797
 0.87506658 0.85274321 0.86702823 0.87395284 0.89516246 0.87061159
 0.89312866 0.89133698 0.88959372 0.89772892 0.88049005 0.87119268
 0.88441238 0.92750956 0.93312673 0.9424241  0.91114232 0.87385599
 0.86189531 0.87564767 0.8834439  0.87012735 0.88315336 0.933272
 0.95172147 0.94048714 0.93506368 0.93167401 0.90513777 0.89197153
 0.87675655 0.86094797 0.84559965 0.83108151]
5 day output [[0.8175638]]
6 day input [0.45566801 0.48157474 0.47973464 0.48084839 0.45450584 0.4670476
 0.51150065 0.51164593 0.52447823 0.49726406 0.51217859 0.51702097
 0.55721273 0.53919907 0.51518086 0.51910319 0.48932255 0.50636773
 0.50215486 0.51784417 0.50622246 0.4871919  0.48772457 0.51489032
 0.52505932 0.52258971 0.48622343 0.50510871 0.54747954 0.5795361
 0.59135151 0.64064694 0.64505351 0.64064694 0.65217181 0.69371943
 0.67585105 0.62950947 0.65623941 0.67454361 0.66655368 0.66398722
 0.67110552 0.69715752 0.70103143 0.71575226 0.75134376 0.74543606
 0.80407728 0.8063532  0.83598857 0.83550433 0.84024987 0.87085371
 0.95007506 0.98319694 1.         0.9464917  0.90140913 0.91937436
 0.9473149  0.93259406 0.87501816 0.882427   0.86242797 0.87506658
 0.85274321 0.86702823 0.87395284 0.89516246 0.87061159 0.89312866
 0.89133698 0.88959372 0.89772892 0.88049005 0.87119268 0.88441238
 0.92750956 0.93312673 0.9424241  0.91114232 0.87385599 0.86189531
 0.87564767 0.8834439  0.87012735 0.88315336 0.933272   0.95172147
 0.94048714 0.93506368 0.93167401 0.90513777 0.89197153 0.87675655
 0.86094797 0.84559965 0.83108151 0.81756377]
6 day output [[0.8051158]]
7 day input [0.48157474 0.47973464 0.48084839 0.45450584 0.4670476  0.51150065
 0.51164593 0.52447823 0.49726406 0.51217859 0.51702097 0.55721273
 0.53919907 0.51518086 0.51910319 0.48932255 0.50636773 0.50215486
 0.51784417 0.50622246 0.4871919  0.48772457 0.51489032 0.52505932
 0.52258971 0.48622343 0.50510871 0.54747954 0.5795361  0.59135151
 0.64064694 0.64505351 0.64064694 0.65217181 0.69371943 0.67585105
 0.62950947 0.65623941 0.67454361 0.66655368 0.66398722 0.67110552
 0.69715752 0.70103143 0.71575226 0.75134376 0.74543606 0.80407728
 0.8063532  0.83598857 0.83550433 0.84024987 0.87085371 0.95007506
 0.98319694 1.         0.9464917  0.90140913 0.91937436 0.9473149
 0.93259406 0.87501816 0.882427   0.86242797 0.87506658 0.85274321
 0.86702823 0.87395284 0.89516246 0.87061159 0.89312866 0.89133698
 0.88959372 0.89772892 0.88049005 0.87119268 0.88441238 0.92750956
 0.93312673 0.9424241  0.91114232 0.87385599 0.86189531 0.87564767
 0.8834439  0.87012735 0.88315336 0.933272   0.95172147 0.94048714
 0.93506368 0.93167401 0.90513777 0.89197153 0.87675655 0.86094797
 0.84559965 0.83108151 0.81756377 0.80511582]
7 day output [[0.7937276]]
8 day input [0.47973464 0.48084839 0.45450584 0.4670476  0.51150065 0.51164593
 0.52447823 0.49726406 0.51217859 0.51702097 0.55721273 0.53919907
 0.51518086 0.51910319 0.48932255 0.50636773 0.50215486 0.51784417
 0.50622246 0.4871919  0.48772457 0.51489032 0.52505932 0.52258971
 0.48622343 0.50510871 0.54747954 0.5795361  0.59135151 0.64064694
 0.64505351 0.64064694 0.65217181 0.69371943 0.67585105 0.62950947
 0.65623941 0.67454361 0.66655368 0.66398722 0.67110552 0.69715752
 0.70103143 0.71575226 0.75134376 0.74543606 0.80407728 0.8063532
 0.83598857 0.83550433 0.84024987 0.87085371 0.95007506 0.98319694
 1.         0.9464917  0.90140913 0.91937436 0.9473149  0.93259406
 0.87501816 0.882427   0.86242797 0.87506658 0.85274321 0.86702823
 0.87395284 0.89516246 0.87061159 0.89312866 0.89133698 0.88959372
 0.89772892 0.88049005 0.87119268 0.88441238 0.92750956 0.93312673
 0.9424241  0.91114232 0.87385599 0.86189531 0.87564767 0.8834439
 0.87012735 0.88315336 0.933272   0.95172147 0.94048714 0.93506368
 0.93167401 0.90513777 0.89197153 0.87675655 0.86094797 0.84559965
 0.83108151 0.81756377 0.80511582 0.79372758]
8 day output [[0.78332597]]
9 day input [0.48084839 0.45450584 0.4670476  0.51150065 0.51164593 0.52447823
 0.49726406 0.51217859 0.51702097 0.55721273 0.53919907 0.51518086
 0.51910319 0.48932255 0.50636773 0.50215486 0.51784417 0.50622246
 0.4871919  0.48772457 0.51489032 0.52505932 0.52258971 0.48622343
 0.50510871 0.54747954 0.5795361  0.59135151 0.64064694 0.64505351
 0.64064694 0.65217181 0.69371943 0.67585105 0.62950947 0.65623941
 0.67454361 0.66655368 0.66398722 0.67110552 0.69715752 0.70103143
 0.71575226 0.75134376 0.74543606 0.80407728 0.8063532  0.83598857
 0.83550433 0.84024987 0.87085371 0.95007506 0.98319694 1.
 0.9464917  0.90140913 0.91937436 0.9473149  0.93259406 0.87501816
 0.882427   0.86242797 0.87506658 0.85274321 0.86702823 0.87395284
 0.89516246 0.87061159 0.89312866 0.89133698 0.88959372 0.89772892
 0.88049005 0.87119268 0.88441238 0.92750956 0.93312673 0.9424241
 0.91114232 0.87385599 0.86189531 0.87564767 0.8834439  0.87012735
 0.88315336 0.933272   0.95172147 0.94048714 0.93506368 0.93167401
 0.90513777 0.89197153 0.87675655 0.86094797 0.84559965 0.83108151
 0.81756377 0.80511582 0.79372758 0.78332597]
9 day output [[0.77379394]]
10 day input [0.45450584 0.4670476  0.51150065 0.51164593 0.52447823 0.49726406
 0.51217859 0.51702097 0.55721273 0.53919907 0.51518086 0.51910319
 0.48932255 0.50636773 0.50215486 0.51784417 0.50622246 0.4871919
 0.48772457 0.51489032 0.52505932 0.52258971 0.48622343 0.50510871
 0.54747954 0.5795361  0.59135151 0.64064694 0.64505351 0.64064694
 0.65217181 0.69371943 0.67585105 0.62950947 0.65623941 0.67454361
 0.66655368 0.66398722 0.67110552 0.69715752 0.70103143 0.71575226
 0.75134376 0.74543606 0.80407728 0.8063532  0.83598857 0.83550433
 0.84024987 0.87085371 0.95007506 0.98319694 1.         0.9464917
 0.90140913 0.91937436 0.9473149  0.93259406 0.87501816 0.882427
 0.86242797 0.87506658 0.85274321 0.86702823 0.87395284 0.89516246
 0.87061159 0.89312866 0.89133698 0.88959372 0.89772892 0.88049005
 0.87119268 0.88441238 0.92750956 0.93312673 0.9424241  0.91114232
 0.87385599 0.86189531 0.87564767 0.8834439  0.87012735 0.88315336
 0.933272   0.95172147 0.94048714 0.93506368 0.93167401 0.90513777
 0.89197153 0.87675655 0.86094797 0.84559965 0.83108151 0.81756377
 0.80511582 0.79372758 0.78332597 0.77379394]
10 day output [[0.76499057]]
11 day input [0.4670476  0.51150065 0.51164593 0.52447823 0.49726406 0.51217859
 0.51702097 0.55721273 0.53919907 0.51518086 0.51910319 0.48932255
 0.50636773 0.50215486 0.51784417 0.50622246 0.4871919  0.48772457
 0.51489032 0.52505932 0.52258971 0.48622343 0.50510871 0.54747954
 0.5795361  0.59135151 0.64064694 0.64505351 0.64064694 0.65217181
 0.69371943 0.67585105 0.62950947 0.65623941 0.67454361 0.66655368
 0.66398722 0.67110552 0.69715752 0.70103143 0.71575226 0.75134376
 0.74543606 0.80407728 0.8063532  0.83598857 0.83550433 0.84024987
 0.87085371 0.95007506 0.98319694 1.         0.9464917  0.90140913
 0.91937436 0.9473149  0.93259406 0.87501816 0.882427   0.86242797
 0.87506658 0.85274321 0.86702823 0.87395284 0.89516246 0.87061159
 0.89312866 0.89133698 0.88959372 0.89772892 0.88049005 0.87119268
 0.88441238 0.92750956 0.93312673 0.9424241  0.91114232 0.87385599
 0.86189531 0.87564767 0.8834439  0.87012735 0.88315336 0.933272
 0.95172147 0.94048714 0.93506368 0.93167401 0.90513777 0.89197153
 0.87675655 0.86094797 0.84559965 0.83108151 0.81756377 0.80511582
 0.79372758 0.78332597 0.77379394 0.76499057]
11 day output [[0.7567651]]
12 day input [0.51150065 0.51164593 0.52447823 0.49726406 0.51217859 0.51702097
 0.55721273 0.53919907 0.51518086 0.51910319 0.48932255 0.50636773
 0.50215486 0.51784417 0.50622246 0.4871919  0.48772457 0.51489032
 0.52505932 0.52258971 0.48622343 0.50510871 0.54747954 0.5795361
 0.59135151 0.64064694 0.64505351 0.64064694 0.65217181 0.69371943
 0.67585105 0.62950947 0.65623941 0.67454361 0.66655368 0.66398722
 0.67110552 0.69715752 0.70103143 0.71575226 0.75134376 0.74543606
 0.80407728 0.8063532  0.83598857 0.83550433 0.84024987 0.87085371
 0.95007506 0.98319694 1.         0.9464917  0.90140913 0.91937436
 0.9473149  0.93259406 0.87501816 0.882427   0.86242797 0.87506658
 0.85274321 0.86702823 0.87395284 0.89516246 0.87061159 0.89312866
 0.89133698 0.88959372 0.89772892 0.88049005 0.87119268 0.88441238
 0.92750956 0.93312673 0.9424241  0.91114232 0.87385599 0.86189531
 0.87564767 0.8834439  0.87012735 0.88315336 0.933272   0.95172147
 0.94048714 0.93506368 0.93167401 0.90513777 0.89197153 0.87675655
 0.86094797 0.84559965 0.83108151 0.81756377 0.80511582 0.79372758
 0.78332597 0.77379394 0.76499057 0.75676513]
12 day output [[0.7489709]]
13 day input [0.51164593 0.52447823 0.49726406 0.51217859 0.51702097 0.55721273
 0.53919907 0.51518086 0.51910319 0.48932255 0.50636773 0.50215486
 0.51784417 0.50622246 0.4871919  0.48772457 0.51489032 0.52505932
 0.52258971 0.48622343 0.50510871 0.54747954 0.5795361  0.59135151
 0.64064694 0.64505351 0.64064694 0.65217181 0.69371943 0.67585105
 0.62950947 0.65623941 0.67454361 0.66655368 0.66398722 0.67110552
 0.69715752 0.70103143 0.71575226 0.75134376 0.74543606 0.80407728
 0.8063532  0.83598857 0.83550433 0.84024987 0.87085371 0.95007506
 0.98319694 1.         0.9464917  0.90140913 0.91937436 0.9473149
 0.93259406 0.87501816 0.882427   0.86242797 0.87506658 0.85274321
 0.86702823 0.87395284 0.89516246 0.87061159 0.89312866 0.89133698
 0.88959372 0.89772892 0.88049005 0.87119268 0.88441238 0.92750956
 0.93312673 0.9424241  0.91114232 0.87385599 0.86189531 0.87564767
 0.8834439  0.87012735 0.88315336 0.933272   0.95172147 0.94048714
 0.93506368 0.93167401 0.90513777 0.89197153 0.87675655 0.86094797
 0.84559965 0.83108151 0.81756377 0.80511582 0.79372758 0.78332597
 0.77379394 0.76499057 0.75676513 0.74897093]
13 day output [[0.74147487]]
14 day input [0.52447823 0.49726406 0.51217859 0.51702097 0.55721273 0.53919907
 0.51518086 0.51910319 0.48932255 0.50636773 0.50215486 0.51784417
 0.50622246 0.4871919  0.48772457 0.51489032 0.52505932 0.52258971
 0.48622343 0.50510871 0.54747954 0.5795361  0.59135151 0.64064694
 0.64505351 0.64064694 0.65217181 0.69371943 0.67585105 0.62950947
 0.65623941 0.67454361 0.66655368 0.66398722 0.67110552 0.69715752
 0.70103143 0.71575226 0.75134376 0.74543606 0.80407728 0.8063532
 0.83598857 0.83550433 0.84024987 0.87085371 0.95007506 0.98319694
 1.         0.9464917  0.90140913 0.91937436 0.9473149  0.93259406
 0.87501816 0.882427   0.86242797 0.87506658 0.85274321 0.86702823
 0.87395284 0.89516246 0.87061159 0.89312866 0.89133698 0.88959372
 0.89772892 0.88049005 0.87119268 0.88441238 0.92750956 0.93312673
 0.9424241  0.91114232 0.87385599 0.86189531 0.87564767 0.8834439
 0.87012735 0.88315336 0.933272   0.95172147 0.94048714 0.93506368
 0.93167401 0.90513777 0.89197153 0.87675655 0.86094797 0.84559965
 0.83108151 0.81756377 0.80511582 0.79372758 0.78332597 0.77379394
 0.76499057 0.75676513 0.74897093 0.74147487]
14 day output [[0.73416233]]
15 day input [0.49726406 0.51217859 0.51702097 0.55721273 0.53919907 0.51518086
 0.51910319 0.48932255 0.50636773 0.50215486 0.51784417 0.50622246
 0.4871919  0.48772457 0.51489032 0.52505932 0.52258971 0.48622343
 0.50510871 0.54747954 0.5795361  0.59135151 0.64064694 0.64505351
 0.64064694 0.65217181 0.69371943 0.67585105 0.62950947 0.65623941
 0.67454361 0.66655368 0.66398722 0.67110552 0.69715752 0.70103143
 0.71575226 0.75134376 0.74543606 0.80407728 0.8063532  0.83598857
 0.83550433 0.84024987 0.87085371 0.95007506 0.98319694 1.
 0.9464917  0.90140913 0.91937436 0.9473149  0.93259406 0.87501816
 0.882427   0.86242797 0.87506658 0.85274321 0.86702823 0.87395284
 0.89516246 0.87061159 0.89312866 0.89133698 0.88959372 0.89772892
 0.88049005 0.87119268 0.88441238 0.92750956 0.93312673 0.9424241
 0.91114232 0.87385599 0.86189531 0.87564767 0.8834439  0.87012735
 0.88315336 0.933272   0.95172147 0.94048714 0.93506368 0.93167401
 0.90513777 0.89197153 0.87675655 0.86094797 0.84559965 0.83108151
 0.81756377 0.80511582 0.79372758 0.78332597 0.77379394 0.76499057
 0.75676513 0.74897093 0.74147487 0.73416233]
15 day output [[0.72694296]]
16 day input [0.51217859 0.51702097 0.55721273 0.53919907 0.51518086 0.51910319
 0.48932255 0.50636773 0.50215486 0.51784417 0.50622246 0.4871919
 0.48772457 0.51489032 0.52505932 0.52258971 0.48622343 0.50510871
 0.54747954 0.5795361  0.59135151 0.64064694 0.64505351 0.64064694
 0.65217181 0.69371943 0.67585105 0.62950947 0.65623941 0.67454361
 0.66655368 0.66398722 0.67110552 0.69715752 0.70103143 0.71575226
 0.75134376 0.74543606 0.80407728 0.8063532  0.83598857 0.83550433
 0.84024987 0.87085371 0.95007506 0.98319694 1.         0.9464917
 0.90140913 0.91937436 0.9473149  0.93259406 0.87501816 0.882427
 0.86242797 0.87506658 0.85274321 0.86702823 0.87395284 0.89516246
 0.87061159 0.89312866 0.89133698 0.88959372 0.89772892 0.88049005
 0.87119268 0.88441238 0.92750956 0.93312673 0.9424241  0.91114232
 0.87385599 0.86189531 0.87564767 0.8834439  0.87012735 0.88315336
 0.933272   0.95172147 0.94048714 0.93506368 0.93167401 0.90513777
 0.89197153 0.87675655 0.86094797 0.84559965 0.83108151 0.81756377
 0.80511582 0.79372758 0.78332597 0.77379394 0.76499057 0.75676513
 0.74897093 0.74147487 0.73416233 0.72694296]
16 day output [[0.7197505]]
17 day input [0.51702097 0.55721273 0.53919907 0.51518086 0.51910319 0.48932255
 0.50636773 0.50215486 0.51784417 0.50622246 0.4871919  0.48772457
 0.51489032 0.52505932 0.52258971 0.48622343 0.50510871 0.54747954
 0.5795361  0.59135151 0.64064694 0.64505351 0.64064694 0.65217181
 0.69371943 0.67585105 0.62950947 0.65623941 0.67454361 0.66655368
 0.66398722 0.67110552 0.69715752 0.70103143 0.71575226 0.75134376
 0.74543606 0.80407728 0.8063532  0.83598857 0.83550433 0.84024987
 0.87085371 0.95007506 0.98319694 1.         0.9464917  0.90140913
 0.91937436 0.9473149  0.93259406 0.87501816 0.882427   0.86242797
 0.87506658 0.85274321 0.86702823 0.87395284 0.89516246 0.87061159
 0.89312866 0.89133698 0.88959372 0.89772892 0.88049005 0.87119268
 0.88441238 0.92750956 0.93312673 0.9424241  0.91114232 0.87385599
 0.86189531 0.87564767 0.8834439  0.87012735 0.88315336 0.933272
 0.95172147 0.94048714 0.93506368 0.93167401 0.90513777 0.89197153
 0.87675655 0.86094797 0.84559965 0.83108151 0.81756377 0.80511582
 0.79372758 0.78332597 0.77379394 0.76499057 0.75676513 0.74897093
 0.74147487 0.73416233 0.72694296 0.71975052]
17 day output [[0.71254283]]
18 day input [0.55721273 0.53919907 0.51518086 0.51910319 0.48932255 0.50636773
 0.50215486 0.51784417 0.50622246 0.4871919  0.48772457 0.51489032
 0.52505932 0.52258971 0.48622343 0.50510871 0.54747954 0.5795361
 0.59135151 0.64064694 0.64505351 0.64064694 0.65217181 0.69371943
 0.67585105 0.62950947 0.65623941 0.67454361 0.66655368 0.66398722
 0.67110552 0.69715752 0.70103143 0.71575226 0.75134376 0.74543606
 0.80407728 0.8063532  0.83598857 0.83550433 0.84024987 0.87085371
 0.95007506 0.98319694 1.         0.9464917  0.90140913 0.91937436
 0.9473149  0.93259406 0.87501816 0.882427   0.86242797 0.87506658
 0.85274321 0.86702823 0.87395284 0.89516246 0.87061159 0.89312866
 0.89133698 0.88959372 0.89772892 0.88049005 0.87119268 0.88441238
 0.92750956 0.93312673 0.9424241  0.91114232 0.87385599 0.86189531
 0.87564767 0.8834439  0.87012735 0.88315336 0.933272   0.95172147
 0.94048714 0.93506368 0.93167401 0.90513777 0.89197153 0.87675655
 0.86094797 0.84559965 0.83108151 0.81756377 0.80511582 0.79372758
 0.78332597 0.77379394 0.76499057 0.75676513 0.74897093 0.74147487
 0.73416233 0.72694296 0.71975052 0.71254283]
18 day output [[0.7053]]
19 day input [0.53919907 0.51518086 0.51910319 0.48932255 0.50636773 0.50215486
 0.51784417 0.50622246 0.4871919  0.48772457 0.51489032 0.52505932
 0.52258971 0.48622343 0.50510871 0.54747954 0.5795361  0.59135151
 0.64064694 0.64505351 0.64064694 0.65217181 0.69371943 0.67585105
 0.62950947 0.65623941 0.67454361 0.66655368 0.66398722 0.67110552
 0.69715752 0.70103143 0.71575226 0.75134376 0.74543606 0.80407728
 0.8063532  0.83598857 0.83550433 0.84024987 0.87085371 0.95007506
 0.98319694 1.         0.9464917  0.90140913 0.91937436 0.9473149
 0.93259406 0.87501816 0.882427   0.86242797 0.87506658 0.85274321
 0.86702823 0.87395284 0.89516246 0.87061159 0.89312866 0.89133698
 0.88959372 0.89772892 0.88049005 0.87119268 0.88441238 0.92750956
 0.93312673 0.9424241  0.91114232 0.87385599 0.86189531 0.87564767
 0.8834439  0.87012735 0.88315336 0.933272   0.95172147 0.94048714
 0.93506368 0.93167401 0.90513777 0.89197153 0.87675655 0.86094797
 0.84559965 0.83108151 0.81756377 0.80511582 0.79372758 0.78332597
 0.77379394 0.76499057 0.75676513 0.74897093 0.74147487 0.73416233
 0.72694296 0.71975052 0.71254283 0.70529997]
19 day output [[0.698021]]
20 day input [0.51518086 0.51910319 0.48932255 0.50636773 0.50215486 0.51784417
 0.50622246 0.4871919  0.48772457 0.51489032 0.52505932 0.52258971
 0.48622343 0.50510871 0.54747954 0.5795361  0.59135151 0.64064694
 0.64505351 0.64064694 0.65217181 0.69371943 0.67585105 0.62950947
 0.65623941 0.67454361 0.66655368 0.66398722 0.67110552 0.69715752
 0.70103143 0.71575226 0.75134376 0.74543606 0.80407728 0.8063532
 0.83598857 0.83550433 0.84024987 0.87085371 0.95007506 0.98319694
 1.         0.9464917  0.90140913 0.91937436 0.9473149  0.93259406
 0.87501816 0.882427   0.86242797 0.87506658 0.85274321 0.86702823
 0.87395284 0.89516246 0.87061159 0.89312866 0.89133698 0.88959372
 0.89772892 0.88049005 0.87119268 0.88441238 0.92750956 0.93312673
 0.9424241  0.91114232 0.87385599 0.86189531 0.87564767 0.8834439
 0.87012735 0.88315336 0.933272   0.95172147 0.94048714 0.93506368
 0.93167401 0.90513777 0.89197153 0.87675655 0.86094797 0.84559965
 0.83108151 0.81756377 0.80511582 0.79372758 0.78332597 0.77379394
 0.76499057 0.75676513 0.74897093 0.74147487 0.73416233 0.72694296
 0.71975052 0.71254283 0.70529997 0.69802099]
20 day output [[0.69071895]]
21 day input [0.51910319 0.48932255 0.50636773 0.50215486 0.51784417 0.50622246
 0.4871919  0.48772457 0.51489032 0.52505932 0.52258971 0.48622343
 0.50510871 0.54747954 0.5795361  0.59135151 0.64064694 0.64505351
 0.64064694 0.65217181 0.69371943 0.67585105 0.62950947 0.65623941
 0.67454361 0.66655368 0.66398722 0.67110552 0.69715752 0.70103143
 0.71575226 0.75134376 0.74543606 0.80407728 0.8063532  0.83598857
 0.83550433 0.84024987 0.87085371 0.95007506 0.98319694 1.
 0.9464917  0.90140913 0.91937436 0.9473149  0.93259406 0.87501816
 0.882427   0.86242797 0.87506658 0.85274321 0.86702823 0.87395284
 0.89516246 0.87061159 0.89312866 0.89133698 0.88959372 0.89772892
 0.88049005 0.87119268 0.88441238 0.92750956 0.93312673 0.9424241
 0.91114232 0.87385599 0.86189531 0.87564767 0.8834439  0.87012735
 0.88315336 0.933272   0.95172147 0.94048714 0.93506368 0.93167401
 0.90513777 0.89197153 0.87675655 0.86094797 0.84559965 0.83108151
 0.81756377 0.80511582 0.79372758 0.78332597 0.77379394 0.76499057
 0.75676513 0.74897093 0.74147487 0.73416233 0.72694296 0.71975052
 0.71254283 0.70529997 0.69802099 0.69071895]
21 day output [[0.68341875]]
22 day input [0.48932255 0.50636773 0.50215486 0.51784417 0.50622246 0.4871919
 0.48772457 0.51489032 0.52505932 0.52258971 0.48622343 0.50510871
 0.54747954 0.5795361  0.59135151 0.64064694 0.64505351 0.64064694
 0.65217181 0.69371943 0.67585105 0.62950947 0.65623941 0.67454361
 0.66655368 0.66398722 0.67110552 0.69715752 0.70103143 0.71575226
 0.75134376 0.74543606 0.80407728 0.8063532  0.83598857 0.83550433
 0.84024987 0.87085371 0.95007506 0.98319694 1.         0.9464917
 0.90140913 0.91937436 0.9473149  0.93259406 0.87501816 0.882427
 0.86242797 0.87506658 0.85274321 0.86702823 0.87395284 0.89516246
 0.87061159 0.89312866 0.89133698 0.88959372 0.89772892 0.88049005
 0.87119268 0.88441238 0.92750956 0.93312673 0.9424241  0.91114232
 0.87385599 0.86189531 0.87564767 0.8834439  0.87012735 0.88315336
 0.933272   0.95172147 0.94048714 0.93506368 0.93167401 0.90513777
 0.89197153 0.87675655 0.86094797 0.84559965 0.83108151 0.81756377
 0.80511582 0.79372758 0.78332597 0.77379394 0.76499057 0.75676513
 0.74897093 0.74147487 0.73416233 0.72694296 0.71975052 0.71254283
 0.70529997 0.69802099 0.69071895 0.68341875]
22 day output [[0.67615265]]
23 day input [0.50636773 0.50215486 0.51784417 0.50622246 0.4871919  0.48772457
 0.51489032 0.52505932 0.52258971 0.48622343 0.50510871 0.54747954
 0.5795361  0.59135151 0.64064694 0.64505351 0.64064694 0.65217181
 0.69371943 0.67585105 0.62950947 0.65623941 0.67454361 0.66655368
 0.66398722 0.67110552 0.69715752 0.70103143 0.71575226 0.75134376
 0.74543606 0.80407728 0.8063532  0.83598857 0.83550433 0.84024987
 0.87085371 0.95007506 0.98319694 1.         0.9464917  0.90140913
 0.91937436 0.9473149  0.93259406 0.87501816 0.882427   0.86242797
 0.87506658 0.85274321 0.86702823 0.87395284 0.89516246 0.87061159
 0.89312866 0.89133698 0.88959372 0.89772892 0.88049005 0.87119268
 0.88441238 0.92750956 0.93312673 0.9424241  0.91114232 0.87385599
 0.86189531 0.87564767 0.8834439  0.87012735 0.88315336 0.933272
 0.95172147 0.94048714 0.93506368 0.93167401 0.90513777 0.89197153
 0.87675655 0.86094797 0.84559965 0.83108151 0.81756377 0.80511582
 0.79372758 0.78332597 0.77379394 0.76499057 0.75676513 0.74897093
 0.74147487 0.73416233 0.72694296 0.71975052 0.71254283 0.70529997
 0.69802099 0.69071895 0.68341875 0.67615265]
23 day output [[0.66895497]]
24 day input [0.50215486 0.51784417 0.50622246 0.4871919  0.48772457 0.51489032
 0.52505932 0.52258971 0.48622343 0.50510871 0.54747954 0.5795361
 0.59135151 0.64064694 0.64505351 0.64064694 0.65217181 0.69371943
 0.67585105 0.62950947 0.65623941 0.67454361 0.66655368 0.66398722
 0.67110552 0.69715752 0.70103143 0.71575226 0.75134376 0.74543606
 0.80407728 0.8063532  0.83598857 0.83550433 0.84024987 0.87085371
 0.95007506 0.98319694 1.         0.9464917  0.90140913 0.91937436
 0.9473149  0.93259406 0.87501816 0.882427   0.86242797 0.87506658
 0.85274321 0.86702823 0.87395284 0.89516246 0.87061159 0.89312866
 0.89133698 0.88959372 0.89772892 0.88049005 0.87119268 0.88441238
 0.92750956 0.93312673 0.9424241  0.91114232 0.87385599 0.86189531
 0.87564767 0.8834439  0.87012735 0.88315336 0.933272   0.95172147
 0.94048714 0.93506368 0.93167401 0.90513777 0.89197153 0.87675655
 0.86094797 0.84559965 0.83108151 0.81756377 0.80511582 0.79372758
 0.78332597 0.77379394 0.76499057 0.75676513 0.74897093 0.74147487
 0.73416233 0.72694296 0.71975052 0.71254283 0.70529997 0.69802099
 0.69071895 0.68341875 0.67615265 0.66895497]
24 day output [[0.66186106]]
25 day input [0.51784417 0.50622246 0.4871919  0.48772457 0.51489032 0.52505932
 0.52258971 0.48622343 0.50510871 0.54747954 0.5795361  0.59135151
 0.64064694 0.64505351 0.64064694 0.65217181 0.69371943 0.67585105
 0.62950947 0.65623941 0.67454361 0.66655368 0.66398722 0.67110552
 0.69715752 0.70103143 0.71575226 0.75134376 0.74543606 0.80407728
 0.8063532  0.83598857 0.83550433 0.84024987 0.87085371 0.95007506
 0.98319694 1.         0.9464917  0.90140913 0.91937436 0.9473149
 0.93259406 0.87501816 0.882427   0.86242797 0.87506658 0.85274321
 0.86702823 0.87395284 0.89516246 0.87061159 0.89312866 0.89133698
 0.88959372 0.89772892 0.88049005 0.87119268 0.88441238 0.92750956
 0.93312673 0.9424241  0.91114232 0.87385599 0.86189531 0.87564767
 0.8834439  0.87012735 0.88315336 0.933272   0.95172147 0.94048714
 0.93506368 0.93167401 0.90513777 0.89197153 0.87675655 0.86094797
 0.84559965 0.83108151 0.81756377 0.80511582 0.79372758 0.78332597
 0.77379394 0.76499057 0.75676513 0.74897093 0.74147487 0.73416233
 0.72694296 0.71975052 0.71254283 0.70529997 0.69802099 0.69071895
 0.68341875 0.67615265 0.66895497 0.66186106]
25 day output [[0.65490395]]
26 day input [0.50622246 0.4871919  0.48772457 0.51489032 0.52505932 0.52258971
 0.48622343 0.50510871 0.54747954 0.5795361  0.59135151 0.64064694
 0.64505351 0.64064694 0.65217181 0.69371943 0.67585105 0.62950947
 0.65623941 0.67454361 0.66655368 0.66398722 0.67110552 0.69715752
 0.70103143 0.71575226 0.75134376 0.74543606 0.80407728 0.8063532
 0.83598857 0.83550433 0.84024987 0.87085371 0.95007506 0.98319694
 1.         0.9464917  0.90140913 0.91937436 0.9473149  0.93259406
 0.87501816 0.882427   0.86242797 0.87506658 0.85274321 0.86702823
 0.87395284 0.89516246 0.87061159 0.89312866 0.89133698 0.88959372
 0.89772892 0.88049005 0.87119268 0.88441238 0.92750956 0.93312673
 0.9424241  0.91114232 0.87385599 0.86189531 0.87564767 0.8834439
 0.87012735 0.88315336 0.933272   0.95172147 0.94048714 0.93506368
 0.93167401 0.90513777 0.89197153 0.87675655 0.86094797 0.84559965
 0.83108151 0.81756377 0.80511582 0.79372758 0.78332597 0.77379394
 0.76499057 0.75676513 0.74897093 0.74147487 0.73416233 0.72694296
 0.71975052 0.71254283 0.70529997 0.69802099 0.69071895 0.68341875
 0.67615265 0.66895497 0.66186106 0.65490395]
26 day output [[0.6481117]]
27 day input [0.4871919  0.48772457 0.51489032 0.52505932 0.52258971 0.48622343
 0.50510871 0.54747954 0.5795361  0.59135151 0.64064694 0.64505351
 0.64064694 0.65217181 0.69371943 0.67585105 0.62950947 0.65623941
 0.67454361 0.66655368 0.66398722 0.67110552 0.69715752 0.70103143
 0.71575226 0.75134376 0.74543606 0.80407728 0.8063532  0.83598857
 0.83550433 0.84024987 0.87085371 0.95007506 0.98319694 1.
 0.9464917  0.90140913 0.91937436 0.9473149  0.93259406 0.87501816
 0.882427   0.86242797 0.87506658 0.85274321 0.86702823 0.87395284
 0.89516246 0.87061159 0.89312866 0.89133698 0.88959372 0.89772892
 0.88049005 0.87119268 0.88441238 0.92750956 0.93312673 0.9424241
 0.91114232 0.87385599 0.86189531 0.87564767 0.8834439  0.87012735
 0.88315336 0.933272   0.95172147 0.94048714 0.93506368 0.93167401
 0.90513777 0.89197153 0.87675655 0.86094797 0.84559965 0.83108151
 0.81756377 0.80511582 0.79372758 0.78332597 0.77379394 0.76499057
 0.75676513 0.74897093 0.74147487 0.73416233 0.72694296 0.71975052
 0.71254283 0.70529997 0.69802099 0.69071895 0.68341875 0.67615265
 0.66895497 0.66186106 0.65490395 0.6481117 ]
27 day output [[0.6415083]]
28 day input [0.48772457 0.51489032 0.52505932 0.52258971 0.48622343 0.50510871
 0.54747954 0.5795361  0.59135151 0.64064694 0.64505351 0.64064694
 0.65217181 0.69371943 0.67585105 0.62950947 0.65623941 0.67454361
 0.66655368 0.66398722 0.67110552 0.69715752 0.70103143 0.71575226
 0.75134376 0.74543606 0.80407728 0.8063532  0.83598857 0.83550433
 0.84024987 0.87085371 0.95007506 0.98319694 1.         0.9464917
 0.90140913 0.91937436 0.9473149  0.93259406 0.87501816 0.882427
 0.86242797 0.87506658 0.85274321 0.86702823 0.87395284 0.89516246
 0.87061159 0.89312866 0.89133698 0.88959372 0.89772892 0.88049005
 0.87119268 0.88441238 0.92750956 0.93312673 0.9424241  0.91114232
 0.87385599 0.86189531 0.87564767 0.8834439  0.87012735 0.88315336
 0.933272   0.95172147 0.94048714 0.93506368 0.93167401 0.90513777
 0.89197153 0.87675655 0.86094797 0.84559965 0.83108151 0.81756377
 0.80511582 0.79372758 0.78332597 0.77379394 0.76499057 0.75676513
 0.74897093 0.74147487 0.73416233 0.72694296 0.71975052 0.71254283
 0.70529997 0.69802099 0.69071895 0.68341875 0.67615265 0.66895497
 0.66186106 0.65490395 0.6481117  0.64150828]
28 day output [[0.6351101]]
29 day input [0.51489032 0.52505932 0.52258971 0.48622343 0.50510871 0.54747954
 0.5795361  0.59135151 0.64064694 0.64505351 0.64064694 0.65217181
 0.69371943 0.67585105 0.62950947 0.65623941 0.67454361 0.66655368
 0.66398722 0.67110552 0.69715752 0.70103143 0.71575226 0.75134376
 0.74543606 0.80407728 0.8063532  0.83598857 0.83550433 0.84024987
 0.87085371 0.95007506 0.98319694 1.         0.9464917  0.90140913
 0.91937436 0.9473149  0.93259406 0.87501816 0.882427   0.86242797
 0.87506658 0.85274321 0.86702823 0.87395284 0.89516246 0.87061159
 0.89312866 0.89133698 0.88959372 0.89772892 0.88049005 0.87119268
 0.88441238 0.92750956 0.93312673 0.9424241  0.91114232 0.87385599
 0.86189531 0.87564767 0.8834439  0.87012735 0.88315336 0.933272
 0.95172147 0.94048714 0.93506368 0.93167401 0.90513777 0.89197153
 0.87675655 0.86094797 0.84559965 0.83108151 0.81756377 0.80511582
 0.79372758 0.78332597 0.77379394 0.76499057 0.75676513 0.74897093
 0.74147487 0.73416233 0.72694296 0.71975052 0.71254283 0.70529997
 0.69802099 0.69071895 0.68341875 0.67615265 0.66895497 0.66186106
 0.65490395 0.6481117  0.64150828 0.63511008]
29 day output [[0.6289286]]
[[0.8919715285301208], [0.8767565488815308], [0.8609479665756226], [0.8455996513366699], [0.8310815095901489], [0.8175637722015381], [0.805115818977356], [0.7937275767326355], [0.7833259701728821], [0.7737939357757568], [0.7649905681610107], [0.7567651271820068], [0.7489709258079529], [0.7414748668670654], [0.7341623306274414], [0.7269429564476013], [0.7197505235671997], [0.7125428318977356], [0.705299973487854], [0.6980209946632385], [0.690718948841095], [0.6834187507629395], [0.6761526465415955], [0.6689549684524536], [0.6618610620498657], [0.6549039483070374], [0.6481117010116577], [0.6415082812309265], [0.635110080242157], [0.6289286017417908]]
In [89]:
day_new=np.arange(1,101)
day_pred=np.arange(101,131)
In [90]:
len(df1)
Out[90]:
1485
In [99]:
plt.plot(day_new,scaler.inverse_transform(df1[1385:]),color='red')
plt.plot(day_pred,scaler.inverse_transform(lst_output),color='blue')
plt.xlabel('Days',fontsize=20,color='black')
plt.ylabel('Price In rupees',fontsize=20,color='black')

plt.legend(["Original Price","predicted price"],loc="lower right") #predicted rice for next 10 days
Out[99]:
<matplotlib.legend.Legend at 0x1d866e378b0>
In [96]:
df3=df1.tolist()
df3.extend(lst_output)
plt.plot(df3[1400:],color='black')
Out[96]:
[<matplotlib.lines.Line2D at 0x1d866cebeb0>]
In [123]:
df3=scaler.inverse_transform(df3).tolist()
In [100]:
plt.plot(df3,color='blue')
plt.xlabel('Days',fontsize=20)
plt.ylabel('Price In rupees',fontsize=20)
Out[100]:
Text(0, 0.5, 'Price In rupees')
In [ ]: